I'm going to overwrite a lot of this notebook's old content. I changed the way I'm calculating wt, and I want to verify that my training worked.


In [1]:
from pearce.emulator import *
from pearce.mocks import cat_dict
import numpy as np
from os import path

In [2]:
import matplotlib
#matplotlib.use('Agg')
from matplotlib import pyplot as plt
%matplotlib inline
import seaborn as sns
sns.set()

In [3]:
# HDF5 files holding the training set and the held-out test set.
# NOTE(review): filenames suggest xi measurements from a Zheng07 HOD at fixed
# number density over a cosmology hypercube — confirm against the generation scripts.
training_file = '/home/users/swmclau2/scratch/xi_zheng07_cosmo_lowmsat/PearceRedMagicXiCosmoFixedNd.hdf5'
test_file = '/home/users/swmclau2/scratch/xi_zheng07_cosmo_test_lowmsat2/PearceRedMagicXiCosmoFixedNd_Test.hdf5'

# Emulation method: Gaussian process.
em_method = 'gp'

In [4]:
# Scale factor and the corresponding redshift (a = 1 gives z = 0).
a = 1.0
z = (1.0 / a) - 1.0

In [5]:
# Centers of the 18 logarithmic scale (r) bins; used below to pick the fixed
# bin for the emulator and as the x-axis when plotting predictions.
scale_bin_centers = np.array([  0.09581734,   0.13534558,   0.19118072,   0.27004994,
         0.38145568,   0.53882047,   0.76110414,   1.07508818,
         1.51860241,   2.14508292,   3.03001016,   4.28000311,
         6.04566509,   8.53972892,  12.06268772,  17.0389993 ,
        24.06822623,  33.99727318])

In [6]:
# Fix redshift and emulate only the second-largest scale bin (r ~ 24.07),
# leaving the 11 cosmology + HOD parameters as the emulator's inputs.
fixed_params = {'z':z, 'r': scale_bin_centers[-2]}

In [7]:
from george.kernels import *
# Number of free emulator inputs: 7 cosmology + 4 HOD parameters
# (matches emu.get_param_names() below).
N = 11#4
# Isotropic squared-exponential kernel with a unit length scale per dimension.
kernel = 1.0*ExpSquaredKernel(np.ones(N,), ndim = N)# + LinearKernel(ndim = N, order = 1, log_gamma2 = 0.0)
#kernel = DotProductKernel(ndim = N)
# NOTE(review): the SpicyBuffalo emulator previously built here was immediately
# clobbered by the OriginalRecipe assignment in the next cell, so its
# (expensive, downsample_factor=1.0) construction was dead work — disabled.
#emu = SpicyBuffalo(training_file, method = em_method, fixed_params=fixed_params, custom_mean_function = None, downsample_factor = 1.0, hyperparams = {'kernel': kernel})

In [8]:
# Build the emulator actually used below (this rebinds `emu`, discarding the
# SpicyBuffalo instance from the previous cell). Only 1% of the training set
# is kept (downsample_factor=0.01) to keep GP training tractable.
emu = OriginalRecipe(training_file, method = em_method, fixed_params=fixed_params,\
                     downsample_factor=0.01, hyperparams = {'kernel': kernel})

In [9]:
# Sanity check: shape of the downsampled training inputs — (n_points, n_params).
emu.downsample_x.shape


Out[9]:
(400, 11)

In [10]:
emu.get_param_names()


Out[10]:
['ombh2',
 'omch2',
 'w0',
 'ns',
 'ln10As',
 'H0',
 'Neff',
 'logM0',
 'sigma_logM',
 'logM1',
 'alpha']
# NOTE(review): this line referenced `v`, which is not defined anywhere in the
# notebook (leftover from a deleted cell) and would raise a NameError on a
# fresh Restart-&-Run-All — disabled.
# emu._emulator.set_parameter_vector(v)

In [11]:
# Optimize the GP kernel hyperparameters; the optimizer result is displayed below.
emu.train_metric()


Out[11]:
      fun: -860.3906083536198
 hess_inv: array([[  1.91614533e-02,   5.52619462e-03,   9.39203550e-03,
          1.64675817e-02,   2.23739354e-02,   3.83819714e-03,
         -7.37729135e+02,   1.27803897e-02,   4.25042486e-03,
          4.40869863e-03,   2.38467972e-03,   6.71167740e-03],
       [  5.52619462e-03,   5.60625251e-02,   2.42462421e-03,
         -5.08357514e-03,   4.76228175e-03,   2.14207901e-03,
         -2.64591595e+02,  -1.87428474e-03,  -4.47162874e-03,
         -2.77247423e-03,  -3.75089303e-03,   7.49168714e-03],
       [  9.39203550e-03,   2.42462421e-03,   5.96168342e-02,
         -1.48806643e-02,   1.03776830e-02,  -9.75109975e-03,
         -3.64276253e+01,   1.05520214e-02,   3.03916294e-03,
         -1.79662320e-02,   1.28865250e-03,   4.47095197e-03],
       [  1.64675817e-02,  -5.08357514e-03,  -1.48806643e-02,
          1.27543437e-01,   1.76240602e-02,  -3.94450907e-03,
         -4.88466192e+02,   1.25331624e-02,  -4.69035969e-03,
          2.58486126e-02,  -6.97060429e-04,   6.65161809e-03],
       [  2.23739354e-02,   4.76228175e-03,   1.03776830e-02,
          1.76240602e-02,   2.41310562e-01,  -6.00648028e-03,
          9.88201378e+02,   1.90020681e-02,  -7.99432564e-03,
          2.99577935e-03,   4.69144463e-03,   7.16590333e-03],
       [  3.83819714e-03,   2.14207901e-03,  -9.75109975e-03,
         -3.94450907e-03,  -6.00648028e-03,   2.76836701e-02,
         -3.91586214e+02,  -5.63805100e-03,  -1.09501815e-03,
         -1.31390816e-03,   2.98412467e-03,  -6.15580264e-03],
       [ -7.37729135e+02,  -2.64591595e+02,  -3.64276253e+01,
         -4.88466192e+02,   9.88201378e+02,  -3.91586214e+02,
          1.99602768e+09,  -9.12831579e+02,   1.05775684e+01,
         -6.95158299e+02,  -3.45209260e+02,   6.47525310e+01],
       [  1.27803897e-02,  -1.87428474e-03,   1.05520214e-02,
          1.25331624e-02,   1.90020681e-02,  -5.63805100e-03,
         -9.12831579e+02,   8.12692469e-02,   3.15831479e-04,
         -1.05717173e-02,   1.29776469e-03,  -2.83847863e-03],
       [  4.25042486e-03,  -4.47162874e-03,   3.03916294e-03,
         -4.69035969e-03,  -7.99432564e-03,  -1.09501815e-03,
          1.05775684e+01,   3.15831479e-04,   1.53915669e-02,
         -7.77698079e-03,  -2.48391196e-03,  -1.82515170e-03],
       [  4.40869863e-03,  -2.77247423e-03,  -1.79662320e-02,
          2.58486126e-02,   2.99577935e-03,  -1.31390816e-03,
         -6.95158299e+02,  -1.05717173e-02,  -7.77698079e-03,
          1.19888899e-01,  -8.95518946e-03,   7.72842092e-03],
       [  2.38467972e-03,  -3.75089303e-03,   1.28865250e-03,
         -6.97060429e-04,   4.69144463e-03,   2.98412467e-03,
         -3.45209260e+02,   1.29776469e-03,  -2.48391196e-03,
         -8.95518946e-03,   1.16970263e-02,  -7.04594831e-03],
       [  6.71167740e-03,   7.49168714e-03,   4.47095197e-03,
          6.65161809e-03,   7.16590333e-03,  -6.15580264e-03,
          6.47525310e+01,  -2.83847863e-03,  -1.82515170e-03,
          7.72842092e-03,  -7.04594831e-03,   4.01067966e-02]])
      jac: array([ -1.63062899e-06,  -1.37998927e-06,  -2.17472299e-06,
        -5.94777042e-07,   2.05118482e-07,  -3.71314691e-06,
        -1.67390467e-10,  -2.12959597e-07,   8.41421952e-06,
         1.39589010e-06,   6.40833592e-06,   4.23077259e-06])
  message: 'Optimization terminated successfully.'
     nfev: 68
      nit: 58
     njev: 68
   status: 0
  success: True
        x: array([ -5.44436106,   3.4589724 ,   3.4149061 ,   3.95878809,
         5.89084074,   2.49834517,  29.19210482,   4.68917248,
         0.36975854,   4.76176797,   0.07218419,   3.17906026])

In [12]:
# Per-point accuracy on the held-out test set; statistic='frac' presumably
# yields fractional errors — confirm against pearce's goodness_of_fit docs.
acc = emu.goodness_of_fit(test_file, statistic = 'frac')

In [13]:
# Mean fractional error over the test set (~3.7% in the recorded run).
# Parenthesized print() is valid under both Python 2 and Python 3,
# unlike the bare Python-2-only print statement it replaces.
print(acc.mean())


0.0374242313921
# Compare emulator predictions (red) against the truth (blue) for one test
# point (plt_idx) across all scale bins.
# NOTE(review): this cell's newlines were lost in conversion (it was a single,
# syntactically invalid line); structure reconstructed from the flattened text.
y_pred, y_data = emu.goodness_of_fit(test_file, statistic=None)

plt_idx = 0
for r, yp, yd in zip(emu.scale_bin_centers, y_pred, y_data):
    plt.scatter(r, yp[plt_idx], color = 'r')
    plt.scatter(r, yd[plt_idx], color = 'b')
plt.xscale('log')
plt.show();

In [ ]: